func runtime.add
272 uses
runtime (current package)
alg.go#L255: h = typehash(a.Elem, add(p, i*a.Elem.Size_), h)
alg.go#L264: h = typehash(f.Typ, add(p, f.Offset), h)
alg.go#L320: if err := mapKeyError2(a.Elem, add(p, i*a.Elem.Size_)); err != nil {
alg.go#L331: if err := mapKeyError2(f.Typ, add(p, f.Offset)); err != nil {
arena.go#L545: userArenaHeapBitsSetType(typ, add(ptr, uintptr(i)*typ.Size_), s)
cgocall.go#L599: p = add(p, at.Elem.Size_)
cgocall.go#L625: p = *(*unsafe.Pointer)(add(p, goarch.PtrSize))
cgocall.go#L648: p = add(p, st.Elem.Size_)
cgocall.go#L671: cgoCheckArg(f.Typ, add(p, f.Offset), true, top, msg)
cgocheck.go#L126: p = add(p, typ.Size_)
cgocheck.go#L154: cgoCheckBits(add(src, -doff), datap.gcdatamask.bytedata, off+doff, size)
cgocheck.go#L159: cgoCheckBits(add(src, -boff), datap.gcbssmask.bytedata, off+boff, size)
cgocheck.go#L203: src = add(src, skipBytes)
cgocheck.go#L218: v := *(*unsafe.Pointer)(add(src, i))
cgocheck.go#L261: src = add(src, at.Elem.Size_)
cgocheck.go#L279: src = add(src, f.Typ.Size_)
chan.go#L104: c.buf = add(unsafe.Pointer(c), hchanSize)
chan.go#L134: return add(c.buf, uintptr(i)*uintptr(c.elemsize))
checkptr.go#L42: end := add(ptr, size-1)
hash64.go#L29: a |= uintptr(*(*byte)(add(p, s>>1))) << 8
hash64.go#L30: a |= uintptr(*(*byte)(add(p, s-1))) << 16
hash64.go#L36: b = r4(add(p, s-4))
hash64.go#L42: b = r8(add(p, s-8))
hash64.go#L49: seed = mix(r8(p)^hashkey[1], r8(add(p, 8))^seed)
hash64.go#L50: seed1 = mix(r8(add(p, 16))^hashkey[2], r8(add(p, 24))^seed1)
hash64.go#L51: seed2 = mix(r8(add(p, 32))^hashkey[3], r8(add(p, 40))^seed2)
hash64.go#L52: p = add(p, 48)
hash64.go#L57: seed = mix(r8(p)^hashkey[1], r8(add(p, 8))^seed)
hash64.go#L58: p = add(p, 16)
hash64.go#L60: a = r8(add(p, l-16))
hash64.go#L61: b = r8(add(p, l-8))
iface.go#L114: p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
iface.go#L175: p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
iface.go#L215: xmhdr := (*[1 << 16]abi.Method)(add(unsafe.Pointer(x), uintptr(x.Moff)))[:nt:nt]
iface.go#L369: x = add(x, 6)
iface.go#L382: x = add(x, 4)
iface.go#L687: m := *(**itab)(add(unsafe.Pointer(&t.entries), i*goarch.PtrSize))
malloc.go#L1169: x = add(x, mallocHeaderSize)
malloc.go#L1321: x = add(x, size-dataSize)
map.go#L202: return *(**bmap)(add(unsafe.Pointer(b), uintptr(t.BucketSize)-goarch.PtrSize))
map.go#L206: *(**bmap)(add(unsafe.Pointer(b), uintptr(t.BucketSize)-goarch.PtrSize)) = ovf
map.go#L210: return add(unsafe.Pointer(b), dataOffset)
map.go#L247: h.extra.nextOverflow = (*bmap)(add(unsafe.Pointer(ovf), uintptr(t.BucketSize)))
map.go#L397: nextOverflow = (*bmap)(add(buckets, base*uintptr(t.BucketSize)))
map.go#L398: last := (*bmap)(add(buckets, (nbuckets-1)*uintptr(t.BucketSize)))
map.go#L433: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map.go#L439: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map.go#L454: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L459: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L503: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map.go#L509: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map.go#L524: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L529: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L547: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map.go#L553: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map.go#L568: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L573: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L649: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map.go#L661: insertk = add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L662: elem = add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L669: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L680: elem = add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L703: insertk = add(unsafe.Pointer(newb), dataOffset)
map.go#L704: elem = add(insertk, abi.MapBucketCount*uintptr(t.KeySize))
map.go#L774: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map.go#L786: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L800: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L966: b = (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map.go#L970: b = (*bmap)(add(it.buckets, bucket*uintptr(t.BucketSize)))
map.go#L974: b = (*bmap)(add(it.buckets, bucket*uintptr(t.BucketSize)))
map.go#L991: k := add(unsafe.Pointer(b), dataOffset+uintptr(offi)*uintptr(t.KeySize))
map.go#L995: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+uintptr(offi)*uintptr(t.ValueSize))
map.go#L1095: b := (*bmap)(add(bucket, i*uintptr(t.BucketSize)))
map.go#L1243: b := (*bmap)(add(h.oldbuckets, bucket*uintptr(t.BucketSize)))
map.go#L1256: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map.go#L1265: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map.go#L1266: x.k = add(unsafe.Pointer(x.b), dataOffset)
map.go#L1267: x.e = add(x.k, abi.MapBucketCount*uintptr(t.KeySize))
map.go#L1273: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map.go#L1274: y.k = add(unsafe.Pointer(y.b), dataOffset)
map.go#L1275: y.e = add(y.k, abi.MapBucketCount*uintptr(t.KeySize))
map.go#L1279: k := add(unsafe.Pointer(b), dataOffset)
map.go#L1280: e := add(k, abi.MapBucketCount*uintptr(t.KeySize))
map.go#L1281: for i := 0; i < abi.MapBucketCount; i, k, e = i+1, add(k, uintptr(t.KeySize)), add(e, uintptr(t.ValueSize)) {
map.go#L1330: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map.go#L1331: dst.e = add(dst.k, abi.MapBucketCount*uintptr(t.KeySize))
map.go#L1349: dst.k = add(dst.k, uintptr(t.KeySize))
map.go#L1350: dst.e = add(dst.e, uintptr(t.ValueSize))
map.go#L1355: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map.go#L1358: ptr := add(b, dataOffset)
map.go#L1641: srcK := add(unsafe.Pointer(src), dataOffset+uintptr(i)*uintptr(t.KeySize))
map.go#L1642: srcEle := add(unsafe.Pointer(src), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+uintptr(i)*uintptr(t.ValueSize))
map.go#L1643: dstK := add(unsafe.Pointer(dst), dataOffset+uintptr(pos)*uintptr(t.KeySize))
map.go#L1644: dstEle := add(unsafe.Pointer(dst), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+uintptr(pos)*uintptr(t.ValueSize))
map.go#L1712: dstBmap := (*bmap)(add(dst.buckets, uintptr(i*int(t.BucketSize))))
map.go#L1715: srcBmap := (*bmap)(add(src.buckets, uintptr((i+j)*int(t.BucketSize))))
map.go#L1735: srcBmap := (*bmap)(add(srcOldbuckets, uintptr(i*int(t.BucketSize))))
map.go#L1741: dstBmap := (*bmap)(add(dst.buckets, (uintptr(i)&bucketMask(dst.B))*uintptr(t.BucketSize)))
map.go#L1766: srcK := add(unsafe.Pointer(srcBmap), dataOffset+i*uintptr(t.KeySize))
map.go#L1771: srcEle := add(unsafe.Pointer(srcBmap), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L1806: b := (*bmap)(add(buckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1814: b := (*bmap)(add(h.oldbuckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1834: k := add(unsafe.Pointer(b), dataOffset+offi*uintptr(t.KeySize))
map.go#L1841: typedmemmove(t.Key, add(s.array, uintptr(s.len)*uintptr(t.Key.Size())), k)
map.go#L1869: b := (*bmap)(add(buckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1877: b := (*bmap)(add(h.oldbuckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1899: ele := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*uintptr(t.KeySize)+offi*uintptr(t.ValueSize))
map.go#L1906: typedmemmove(t.Elem, add(s.array, uintptr(s.len)*uintptr(t.Elem.Size())), ele)
map_fast32.go#L31: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L37: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L44: for i, k := uintptr(0), b.keys(); i < abi.MapBucketCount; i, k = i+1, add(k, 4) {
map_fast32.go#L46: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*4+i*uintptr(t.ValueSize))
map_fast32.go#L80: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L86: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L93: for i, k := uintptr(0), b.keys(); i < abi.MapBucketCount; i, k = i+1, add(k, 4) {
map_fast32.go#L95: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*4+i*uintptr(t.ValueSize)), true
map_fast32.go#L138: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast32.go#L157: k := *((*uint32)(add(unsafe.Pointer(b), dataOffset+i*4)))
map_fast32.go#L188: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*4)
map_fast32.go#L195: elem := add(unsafe.Pointer(insertb), dataOffset+abi.MapBucketCount*4+inserti*uintptr(t.ValueSize))
map_fast32.go#L237: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast32.go#L256: k := *((*unsafe.Pointer)(add(unsafe.Pointer(b), dataOffset+i*4)))
map_fast32.go#L287: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*4)
map_fast32.go#L294: elem := add(unsafe.Pointer(insertb), dataOffset+abi.MapBucketCount*4+inserti*uintptr(t.ValueSize))
map_fast32.go#L323: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast32.go#L327: for i, k := uintptr(0), b.keys(); i < abi.MapBucketCount; i, k = i+1, add(k, 4) {
map_fast32.go#L339: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*4+i*uintptr(t.ValueSize))
map_fast32.go#L404: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map_fast32.go#L413: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map_fast32.go#L414: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_fast32.go#L415: x.e = add(x.k, abi.MapBucketCount*4)
map_fast32.go#L421: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map_fast32.go#L422: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_fast32.go#L423: y.e = add(y.k, abi.MapBucketCount*4)
map_fast32.go#L427: k := add(unsafe.Pointer(b), dataOffset)
map_fast32.go#L428: e := add(k, abi.MapBucketCount*4)
map_fast32.go#L429: for i := 0; i < abi.MapBucketCount; i, k, e = i+1, add(k, 4), add(e, uintptr(t.ValueSize)) {
map_fast32.go#L454: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_fast32.go#L455: dst.e = add(dst.k, abi.MapBucketCount*4)
map_fast32.go#L473: dst.k = add(dst.k, 4)
map_fast32.go#L474: dst.e = add(dst.e, uintptr(t.ValueSize))
map_fast32.go#L479: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map_fast32.go#L482: ptr := add(b, dataOffset)
map_fast64.go#L31: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L37: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L44: for i, k := uintptr(0), b.keys(); i < abi.MapBucketCount; i, k = i+1, add(k, 8) {
map_fast64.go#L46: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*8+i*uintptr(t.ValueSize))
map_fast64.go#L80: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L86: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L93: for i, k := uintptr(0), b.keys(); i < abi.MapBucketCount; i, k = i+1, add(k, 8) {
map_fast64.go#L95: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*8+i*uintptr(t.ValueSize)), true
map_fast64.go#L138: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast64.go#L157: k := *((*uint64)(add(unsafe.Pointer(b), dataOffset+i*8)))
map_fast64.go#L188: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*8)
map_fast64.go#L195: elem := add(unsafe.Pointer(insertb), dataOffset+abi.MapBucketCount*8+inserti*uintptr(t.ValueSize))
map_fast64.go#L239: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast64.go#L258: k := *((*unsafe.Pointer)(add(unsafe.Pointer(b), dataOffset+i*8)))
map_fast64.go#L289: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*8)
map_fast64.go#L296: elem := add(unsafe.Pointer(insertb), dataOffset+abi.MapBucketCount*8+inserti*uintptr(t.ValueSize))
map_fast64.go#L325: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast64.go#L329: for i, k := uintptr(0), b.keys(); i < abi.MapBucketCount; i, k = i+1, add(k, 8) {
map_fast64.go#L343: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*8+i*uintptr(t.ValueSize))
map_fast64.go#L408: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map_fast64.go#L417: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map_fast64.go#L418: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_fast64.go#L419: x.e = add(x.k, abi.MapBucketCount*8)
map_fast64.go#L425: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map_fast64.go#L426: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_fast64.go#L427: y.e = add(y.k, abi.MapBucketCount*8)
map_fast64.go#L431: k := add(unsafe.Pointer(b), dataOffset)
map_fast64.go#L432: e := add(k, abi.MapBucketCount*8)
map_fast64.go#L433: for i := 0; i < abi.MapBucketCount; i, k, e = i+1, add(k, 8), add(e, uintptr(t.ValueSize)) {
map_fast64.go#L458: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_fast64.go#L459: dst.e = add(dst.k, abi.MapBucketCount*8)
map_fast64.go#L483: dst.k = add(dst.k, 8)
map_fast64.go#L484: dst.e = add(dst.e, uintptr(t.ValueSize))
map_fast64.go#L489: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map_fast64.go#L492: ptr := add(b, dataOffset)
map_faststr.go#L30: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L39: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L46: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L55: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L62: if *((*[4]byte)(add(key.str, uintptr(key.len)-4))) != *((*[4]byte)(add(k.str, uintptr(key.len)-4))) {
map_faststr.go#L72: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*goarch.PtrSize))
map_faststr.go#L74: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize))
map_faststr.go#L82: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L88: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L95: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L101: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L134: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L143: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
map_faststr.go#L150: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L159: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
map_faststr.go#L166: if *((*[4]byte)(add(key.str, uintptr(key.len)-4))) != *((*[4]byte)(add(k.str, uintptr(key.len)-4))) {
map_faststr.go#L176: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*goarch.PtrSize))
map_faststr.go#L178: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize)), true
map_faststr.go#L186: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L192: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L199: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L205: return add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
map_faststr.go#L249: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_faststr.go#L269: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+i*2*goarch.PtrSize))
map_faststr.go#L307: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*2*goarch.PtrSize)
map_faststr.go#L313: elem := add(unsafe.Pointer(insertb), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+inserti*uintptr(t.ValueSize))
map_faststr.go#L343: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_faststr.go#L348: for i, kptr := uintptr(0), b.keys(); i < abi.MapBucketCount; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L358: e := add(unsafe.Pointer(b), dataOffset+abi.MapBucketCount*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L423: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map_faststr.go#L432: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map_faststr.go#L433: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_faststr.go#L434: x.e = add(x.k, abi.MapBucketCount*2*goarch.PtrSize)
map_faststr.go#L440: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map_faststr.go#L441: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_faststr.go#L442: y.e = add(y.k, abi.MapBucketCount*2*goarch.PtrSize)
map_faststr.go#L446: k := add(unsafe.Pointer(b), dataOffset)
map_faststr.go#L447: e := add(k, abi.MapBucketCount*2*goarch.PtrSize)
map_faststr.go#L448: for i := 0; i < abi.MapBucketCount; i, k, e = i+1, add(k, 2*goarch.PtrSize), add(e, uintptr(t.ValueSize)) {
map_faststr.go#L473: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_faststr.go#L474: dst.e = add(dst.k, abi.MapBucketCount*2*goarch.PtrSize)
map_faststr.go#L487: dst.k = add(dst.k, 2*goarch.PtrSize)
map_faststr.go#L488: dst.e = add(dst.e, uintptr(t.ValueSize))
map_faststr.go#L493: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map_faststr.go#L496: ptr := add(b, dataOffset)
mbitmap.go#L739: gctyp.GCData = (*byte)(add(unsafe.Pointer(progSpan.base()), heapBitsOff))
mgcmark.go#L276: ptrmask := (*uint8)(add(unsafe.Pointer(ptrmask0), uintptr(shard)*(rootBlockBytes/(8*goarch.PtrSize))))
mgcsweep.go#L970: *(*uint32)(add(x, i)) = 0xdeadbeef
mpagealloc_64bit.go#L122: offAddr{uintptr(add(base, baseOffset))},
mpagealloc_64bit.go#L123: offAddr{uintptr(add(base, limitOffset))},
mprof.go#L248: stk := (*[maxProfStackDepth]uintptr)(add(unsafe.Pointer(b), unsafe.Sizeof(*b)))
mprof.go#L261: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mprof.go#L270: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mspanset.go#L299: return (*atomic.Pointer[spanSetBlock])(add(s.p, goarch.PtrSize*idx))
netpoll.go#L700: pd := (*pollDesc)(add(mem, i*pdSize))
os_linux.go#L245: auxvp := (*[1 << 28]uintptr)(add(unsafe.Pointer(argv), uintptr(n)*goarch.PtrSize))
panic.go#L696: fd = add(fd, unsafe.Sizeof(b))
panic.go#L837: p.deferBitsPtr = (*byte)(add(sp, s.deferBitsOffset))
panic.go#L838: p.slotsPtr = add(sp, s.slotsOffset)
panic.go#L878: p.argp = add(p.startSP, sys.MinFrameSize)
panic.go#L904: return *(*func())(add(p.slotsPtr, i*goarch.PtrSize)), true
panic.go#L992: deferBitsPtr := (*uint8)(add(varp, -uintptr(deferBitsOffset)))
panic.go#L1001: p.slotsPtr = add(varp, -uintptr(slotsOffset))
proc.go#L687: return *(**g)(add(unsafe.Pointer(ptr), i*goarch.PtrSize))
proc.go#L7287: firstFunc := add(unsafe.Pointer(t), 8)
proc.go#L7289: p := add(firstFunc, uintptr(i)*goarch.PtrSize)
runtime1.go#L63: return *(**byte)(add(unsafe.Pointer(argv), uintptr(i)*goarch.PtrSize))
signal_linux_amd64.go#L55: *(*uintptr)(add(unsafe.Pointer(c.info), 2*goarch.PtrSize)) = uintptr(x)
slice.go#L59: memclrNoHeapPointers(add(to, copymem), tomem-copymem)
slice.go#L269: memclrNoHeapPointers(add(p, newlenmem), capmem-newlenmem)
slice.go#L344: memclrNoHeapPointers(add(new.array, oldcapmem), newlenmem-oldcapmem)
stack.go#L625: print(" ", add(scanp, (i+j)*goarch.PtrSize), ":", ptrnames[bv.ptrbit(i+j)], ":", hex(*(*uintptr)(add(scanp, (i+j)*goarch.PtrSize))), " # ", i, " ", *addb(bv.bytedata, i/8), "\n")
stack.go#L632: pp := (*uintptr)(add(scanp, (i+j)*goarch.PtrSize))
stkframe.go#L248: p = add(p, goarch.PtrSize)
string.go#L113: p = add(p, 7)
string.go#L286: memclrNoHeapPointers(add(p, uintptr(size)), cap-uintptr(size))
string.go#L301: memclrNoHeapPointers(add(p, uintptr(size)*4), mem-uintptr(size)*4)
stubs.go#L24: func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
symtab.go#L889: ffb := (*findfuncbucket)(add(unsafe.Pointer(datap.findfunctab), b*unsafe.Sizeof(findfuncbucket{})))
symtab.go#L1179: return *(*uint32)(add(unsafe.Pointer(&f.nfuncdata), unsafe.Sizeof(f.nfuncdata)+uintptr(table)*4))
traceback.go#L676: bits := *(*uint8)(add(liveInfo, uintptr(liveIdx)+uintptr(slotIdx/8)))
traceback.go#L681: x := readUnaligned64(add(argp, uintptr(off)))
vdso_linux.go#L113: pt := (*elfPhdr)(add(pt, uintptr(i)*unsafe.Sizeof(elfPhdr{})))
vdso_linux.go#L192: aux := (*elfVerdaux)(add(unsafe.Pointer(def), uintptr(def.vd_aux)))
vdso_linux.go#L201: def = (*elfVerdef)(add(unsafe.Pointer(def), uintptr(def.vd_next)))
The pages are generated with Golds v0.7.6. (GOOS=linux GOARCH=amd64)
Golds is a Go 101 project developed by Tapir Liu.
PR and bug reports are welcome and can be submitted to the issue list.
Please follow @zigo_101 (reachable from the left QR code) to get the latest news of Golds.